/**
 * Builds the CART decision tree from the given training data.
 *
 * <p>When pruning is disabled ({@code m_Prune == false}) a single unpruned
 * tree is grown on all the data. Otherwise, minimal cost-complexity pruning
 * is performed: {@code m_numFoldsPruning}-fold cross-validation is used to
 * estimate the error of each pruned subtree (indexed by its alpha value),
 * the best alpha is selected (optionally via the 1-SE rule), and the tree
 * grown on all the data is pruned back to that alpha.
 *
 * @param data the training instances; copied internally, instances with a
 *             missing class value are removed from the copy
 * @throws Exception if the classifier's capabilities are not met by the
 *                   data, or if tree construction fails
 */
public void buildClassifier(Instances data) throws Exception {

		// Validate data against this classifier's capabilities, then work on a
		// copy so the caller's dataset is not modified.
		getCapabilities().testWithFail(data);
		data = new Instances(data);
		data.deleteWithMissingClass();

		// unpruned CART decision tree
		if (!m_Prune) {

			// calculate sorted indices and weights, and compute initial class
			// counts.
			int[][] sortedIndices = new int[data.numAttributes()][0];
			double[][] weights = new double[data.numAttributes()][0];
			double[] classProbs = new double[data.numClasses()];
			double totalWeight = computeSortedInfo(data, sortedIndices,
					weights, classProbs);

			// Grow the full tree; no pruning pass, so we are done.
			makeTree(data, data.numInstances(), sortedIndices, weights,
					classProbs, totalWeight, m_minNumObj, m_Heuristic);
			return;
		}

		// Pruned tree: draw a randomized, stratified subsample of size
		// numInstances * m_SizePer for the cross-validated pruning phase.
		// NOTE(review): the "- 1" below drops one instance even when
		// m_SizePer == 1 — looks like an off-by-one, but it matches the
		// upstream implementation; confirm before changing (affects models).
		Random random = new Random(m_Seed);
		Instances cvData = new Instances(data);
		cvData.randomize(random);
		cvData = new Instances(cvData, 0,
				(int) (cvData.numInstances() * m_SizePer) - 1);
		cvData.stratify(m_numFoldsPruning);

		// Per-fold sequences of alpha values and corresponding test-set errors,
		// filled in by prune() below.
		double[][] alphas = new double[m_numFoldsPruning][];
		double[][] errors = new double[m_numFoldsPruning][];

		// calculate errors and alphas for each fold
		for (int i = 0; i < m_numFoldsPruning; i++) {

			// for every fold, grow tree on training set and fix error on test
			// set.
			Instances train = cvData.trainCV(m_numFoldsPruning, i);
			Instances test = cvData.testCV(m_numFoldsPruning, i);

			// calculate sorted indices and weights, and compute initial class
			// counts for each fold
			int[][] sortedIndices = new int[train.numAttributes()][0];
			double[][] weights = new double[train.numAttributes()][0];
			double[] classProbs = new double[train.numClasses()];
			double totalWeight = computeSortedInfo(train, sortedIndices,
					weights, classProbs);

			// Grow the full tree on this fold's training split.
			makeTree(train, train.numInstances(), sortedIndices, weights,
					classProbs, totalWeight, m_minNumObj, m_Heuristic);

			// +2 slots: one entry per pruning step plus room for the boundary
			// sentinel values prune() records beyond the last real step.
			int numNodes = numInnerNodes();
			alphas[i] = new double[numNodes + 2];
			errors[i] = new double[numNodes + 2];

			// prune back and log alpha-values and errors on test set
			prune(alphas[i], errors[i], test);
		}

		// calculate sorted indices and weights, and compute initial class
		// counts on all training instances
		int[][] sortedIndices = new int[data.numAttributes()][0];
		double[][] weights = new double[data.numAttributes()][0];
		double[] classProbs = new double[data.numClasses()];
		double totalWeight = computeSortedInfo(data, sortedIndices, weights,
				classProbs);

		// build tree using all the data
		makeTree(data, data.numInstances(), sortedIndices, weights, classProbs,
				totalWeight, m_minNumObj, m_Heuristic);

		int numNodes = numInnerNodes();

		double[] treeAlphas = new double[numNodes + 2];

		// prune back and log alpha-values
		int iterations = prune(treeAlphas, null, null);

		double[] treeErrors = new double[numNodes + 2];

		// for each pruned subtree, find the cross-validated error
		for (int i = 0; i <= iterations; i++) {
			// compute midpoint alphas
			// (geometric mean of consecutive alpha values, per Breiman et al.)
			double alpha = Math.sqrt(treeAlphas[i] * treeAlphas[i + 1]);
			double error = 0;
			for (int k = 0; k < m_numFoldsPruning; k++) {
				// Find the largest fold alpha not exceeding this midpoint and
				// take the error logged just before it.
				// NOTE(review): this scan assumes prune() terminates alphas[k]
				// with a value larger than any midpoint (e.g. Double.MAX_VALUE)
				// — otherwise it would run past the end; confirm in prune().
				int l = 0;
				while (alphas[k][l] <= alpha)
					l++;
				error += errors[k][l - 1];
			}
			treeErrors[i] = error / m_numFoldsPruning;
		}

		// find best alpha
		// (scanning from most-pruned down, ties favor the smaller tree)
		int best = -1;
		double bestError = Double.MAX_VALUE;
		for (int i = iterations; i >= 0; i--) {
			if (treeErrors[i] < bestError) {
				bestError = treeErrors[i];
				best = i;
			}
		}

		// 1 SE rule to choose expansion
		// (pick the smallest tree whose error is within one standard error of
		// the minimum; SE estimated from a binomial error model)
		if (m_UseOneSE) {
			double oneSE = Math.sqrt(bestError * (1 - bestError)
					/ (data.numInstances()));
			for (int i = iterations; i >= 0; i--) {
				if (treeErrors[i] <= bestError + oneSE) {
					best = i;
					break;
				}
			}
		}

		// Final alpha is the midpoint (geometric mean) of the chosen interval.
		double bestAlpha = Math.sqrt(treeAlphas[best] * treeAlphas[best + 1]);

		// "unprune" final tree (faster than regrowing it)
		unprune();
		prune(bestAlpha);
	}

